@@ -1,2 +1,4 @@ |
||
1 | 1 |
*.psd |
2 | 2 |
dump.rdb |
3 |
+.DS_Store |
|
4 |
+public/.DS_Store |
@@ -0,0 +1,161 @@ |
||
1 |
+require 'json' |
|
2 |
+require 'ostruct' |
|
3 |
+ |
|
4 |
class WhoisParser
  # Parses raw whois lookup results into a compact JSON document.
  #
  # data         - Hash of already-parsed whois fields (string keys, e.g.
  #                "registered?", "registrar", "created_on").
  # whois_lookup - the raw whois response; only its #to_s is used.
  # url          - the domain name that was looked up.
  #
  # Returns a pretty-printed JSON string.
  def parse(data, whois_lookup, url)
    @parsed_data = {}

    # The TLD decides which registry-specific parser understands the output.
    domain_type = url.to_s.split(".").last

    # Fill in registration status when the parsed data does not carry it.
    if data["registered?"].nil?
      registered = check_domain_availability(data, whois_lookup)
      data["registered?"] = registered
      data["available?"] = !registered
    end

    if data["registered?"]
      case domain_type
      when "br"      then parserBr(data, whois_lookup, url)
      when "network" then parserNetwork(data, whois_lookup, url)
      else                parserCom(data, whois_lookup, url)
      end
    else
      # Unregistered domains have no registry record; echo the queried name.
      @parsed_data["domain"] = url
    end

    @parsed_data["registered?"] = data["registered?"]
    @parsed_data["available?"] = data["available?"]

    JSON.pretty_generate(@parsed_data)
  end

  # Parser for generic TLDs (.com and anything without a dedicated parser).
  def parserCom(data, whois_lookup, url)
    raw = whois_lookup.to_s

    # Domain name: prefer the pre-parsed value, fall back to the queried URL.
    @parsed_data["domain"] = data["domain"].nil? ? url : data["domain"]

    # Owner: "Registrant Name: ..." line, title-cased ("JOHN DOE" -> "John Doe").
    owner = spliter(raw.scan(/Registrant\sName:\s([^\\]+)/).first, "\r")
    @parsed_data["owner"] = owner && owner.downcase.split.map(&:capitalize).join(' ')

    # Registrar comes from the structured data when present (nil-safe).
    registrar = data["registrar"]
    @parsed_data["registrar"] = registrar && registrar[:name]

    # Registry responses often repeat "Creation Date"/"Expiration Date"
    # (registrar section first, registry section later); keep the LAST one.
    @parsed_data["created_on"] = last_marker_value(raw, /Creation\sDate:\s([^\\]+)/)
    @parsed_data["expires_on"] = last_marker_value(raw, /Expiration\sDate:\s([^\\]+)/)

    # BUGFIX: was written to "update_on" while parserBr wrote "updated_on";
    # normalized to "updated_on" everywhere.
    @parsed_data["updated_on"] = data["updated_on"]
  end

  # Parser for .br domains (Registro BR response format).
  def parserBr(data, whois_lookup, url)
    raw = whois_lookup.to_s

    # "domain: ..." line; fall back to the queried URL when absent or blank.
    domain = spliter(raw.scan(/domain:\s([^\\]+)/), "\r")
    @parsed_data["domain"] = (domain.nil? || domain.empty?) ? url : domain

    # Registro BR responses are Latin-1; transcode the owner name to UTF-8.
    owner = spliter(raw.scan(/owner:\s([^\\]+)/).first, "\r")
    @parsed_data["owner"] = owner && owner.force_encoding("ISO-8859-1").encode("UTF-8")

    # All .br registrations go through the single national registrar.
    @parsed_data["registrar"] = "Registro BR"

    # Dates are already present in the structured data for .br.
    @parsed_data["created_on"] = data["created_on"]
    @parsed_data["expires_on"] = data["expires_on"]
    @parsed_data["updated_on"] = data["updated_on"]
  end

  # Parser for .network domains (Donuts registry response format).
  def parserNetwork(data, whois_lookup, url)
    raw = whois_lookup.to_s

    # "Domain Name: ..." line; fall back to the queried URL when absent/blank.
    domain = spliter(raw.scan(/Domain\sName:\s([^\\]+)/), "\r")
    @parsed_data["domain"] = (domain.nil? || domain.empty?) ? url : domain

    @parsed_data["owner"] = spliter(raw.scan(/Registrant\sName:\s([^\\]+)/).first, "\r")
    @parsed_data["registrar"] = spliter(raw.scan(/Registrar:\s([^\\]+)/))
    @parsed_data["created_on"] = spliter(raw.scan(/Creation\sDate:\s([^\\]+)/).first, "\n")
    @parsed_data["expires_on"] = spliter(raw.scan(/Expiry\sDate:\s([^\\]+)/).first, "\n")
    # "<" delimiter strips the trailing "<<<" marker on the last-update line.
    # BUGFIX: key normalized from "update_on" to "updated_on".
    @parsed_data["updated_on"] =
      spliter(raw.scan(/Last\supdate\sof\sWHOIS\sdatabase:\s([^\\]+)/).first, "<")
  end

  # Unwraps String#scan results (which may be nested arrays) and returns the
  # first split_char-delimited segment, stripped.
  # BUGFIX: nil-safe — returns nil when the scan found nothing, instead of
  # raising NoMethodError on nil.split.
  def spliter(data, split_char = "\r")
    data = data.first while data.is_a?(Array) && !data.empty?
    return nil unless data.is_a?(String)
    data.split(split_char).first.to_s.strip
  end

  # Despite the name, returns true when the domain IS registered: neither the
  # "No match for" nor the "domain not found" availability marker appears in
  # the raw whois output.
  # BUGFIX: the original tested `data1 == nil || data2 == nil`, which returned
  # true (registered) whenever EITHER marker was absent — so nearly every
  # available domain was misreported as registered. Both must be absent.
  def check_domain_availability(data, whois_lookup)
    raw = whois_lookup.to_s
    no_match  = raw.scan(/No\smatch\sfor/).first
    not_found = raw.scan(/domain\snot\sfound/).first
    no_match.nil? && not_found.nil?
  end

  # Extracts the value following the LAST occurrence of regex in raw.
  # The first capture runs greedily to end-of-string, so re-scanning it finds
  # later repeats of the marker; with a single occurrence we keep that capture.
  # BUGFIX: the original crashed (nil.split) when the marker appeared once or
  # not at all; now returns the single value, or nil when absent.
  def last_marker_value(raw, regex)
    first_match = raw.scan(regex).first
    return nil if first_match.nil?
    tail = first_match.first
    nested = tail.scan(regex).last
    spliter(nested.nil? ? tail : nested, "\n")
  end

end
@@ -4,6 +4,7 @@ require 'haml' |
||
4 | 4 |
require 'json' |
5 | 5 |
require 'ostruct' |
6 | 6 |
require "sinatra/reloader" if development? |
7 |
+require_relative 'json_parser' |
|
7 | 8 |
|
8 | 9 |
# ruby crimes |
9 | 10 |
class Whois::Record |
@@ -33,6 +34,8 @@ class Whois::Record |
||
33 | 34 |
end |
34 | 35 |
end |
35 | 36 |
|
37 |
+Whois::Server.define :tld, ".network", "whois.donuts.co" |
|
38 |
+ |
|
36 | 39 |
before do |
37 | 40 |
response['Access-Control-Allow-Origin'] = '*' |
38 | 41 |
end |
@@ -54,6 +57,9 @@ helpers do |
||
54 | 57 |
end |
55 | 58 |
end |
56 | 59 |
|
60 |
+set :views, File.dirname(__FILE__) + '/views' |
|
61 |
+set :public_folder, File.dirname(__FILE__) + '/public' |
|
62 |
+ |
|
57 | 63 |
get '/' do |
58 | 64 |
cache_for_day |
59 | 65 |
haml :index |
@@ -73,10 +79,12 @@ end |
||
73 | 79 |
get '/lookup.json' do |
74 | 80 |
content_type 'application/json' |
75 | 81 |
begin |
76 |
- cache_for_day |
|
77 |
- JSON.pretty_generate(whois_lookup.to_h) |
|
82 |
+ #cache_for_day |
|
83 |
+ parser = WhoisParser.new |
|
84 |
+ data = whois_lookup.to_h |
|
85 |
+ parser.parse(data, whois_lookup, params[:url]) |
|
78 | 86 |
rescue Exception => e |
79 | 87 |
@error = e |
80 | 88 |
{ :error => @error }.to_json |
81 | 89 |
end |
82 |
-end |
|
90 |
+end |
@@ -9,4 +9,4 @@ |
||
9 | 9 |
|
10 | 10 |
%form{ :action => "/lookup", :method => "get"} |
11 | 11 |
%input{ :type => "text", :name => "url"} |
12 |
- %input.submit{ :type => "submit", :value => "Lookup Whois"} |
|
12 |
+ %input.submit{ :type => "submit", :value => "Lookup Whois"} |